Import project scripts

In [1]:
# -- Import other scripts
import functions as fn
import visualizations as vs
import data as dt

# -- basic functions
import pandas as pd
import numpy as np
import random

# -- file operations
from os import listdir, path
from os.path import isfile, join

# -- complementary
from rich import print
from rich import inspect

# Reproducible results
random.seed(123)

# visualize plotly in notebook
import plotly.io as pio             # to define input-output of plots
pio.renderers.default = "notebook"  # to render the plot locally in the notebook / default web browser

LOAD PRICES, FOLDS AND PROCESS RESULTS DATA

In [2]:
# Folder that holds the backup files of this experiment batch
dir_route = 'files/backups/ludwig/test_1_09042021/'

# Available files with experiment data, sorted alphabetically
abspath = path.abspath(dir_route)
experiment_files = sorted(f for f in listdir(abspath) if isfile(join(abspath, f)))

# Experiments to show
# [11, 0.9, 0.5, 'all']

# Index of the experiment file to analyze
experiment = 11

# Fold case is encoded in the first character of the file name
fold_case = dt.fold_cases[experiment_files[experiment][0]]

# Full route to the selected experiment file
file_route = dir_route + experiment_files[experiment]

# Historical OHLC prices
historical_prices = dt.ohlc_data

# Timeseries data division in t-folds
folds = fn.t_folds(p_data=historical_prices, p_period=fold_case)

# Load previously generated results from the pickle backup
memory_palace = dt.data_pickle(p_data_objects=None, p_data_action='load',
                               p_data_file=file_route)['memory_palace']

# Names of the available models
ml_models = list(dt.models.keys())
/home/franciscome/.local/lib/python3.8/site-packages/sklearn/base.py:310: UserWarning:

Trying to unpickle estimator LogisticRegression from version 0.24.1 when using version 0.24.0. This might lead to breaking code or invalid results. Use at your own risk.

TEXT DESCRIPTION OF EXPERIMENT

Text description of the results for the found case: fold size, cost function, feature transformation, train-validation proportion, embargo.

PLOT TIME SERIES BLOCK T-FOLDS

In [3]:
# Start and end dates of every fold, used as vertical lines in the T-Folds plot
dates_folds = []
for fold_data in folds.values():
    dates_folds += [fold_data['timestamp'].iloc[0], fold_data['timestamp'].iloc[-1]]

# OHLC plot with the fold boundaries marked as vertical lines
plot_2 = vs.plot_ohlc(p_ohlc=historical_prices, p_theme=dt.theme_plot_2, p_vlines=dates_folds)

# Render the figure in the notebook
plot_2.show()

EXPERIMENT 1: OOS GENERALIZATION

In [4]:
# Filtered cases: accuracy thresholds on train/validation metrics
filters = {'filter_1': {'metric': 'acc-train', 'objective': 'above_threshold', 'threshold': 0.90},
           'filter_2': {'metric': 'acc-val', 'objective': 'above_threshold', 'threshold': 0.50},
           'filter_3': {'metric': 'acc-diff', 'objective': 'all'}}

# metric type for MIN, MAX, MODE
metric_case = 'acc-train'

# -- get MIN, MAX, MODE, FILTERED Cases
met_cases = fn.model_cases(p_models=ml_models, p_global_cases=memory_palace, 
                           p_data_folds=folds, p_cases_type=metric_case, p_filters=filters)

for model_case in ml_models:

    # periods with at least 1 matching case
    filtered_periods = met_cases[model_case]['met_filter']['period']

    # in case of at least 1 period found
    if len(filtered_periods) > 0:
        # collect all occurrences first, then concatenate ONCE
        # (previously pd.concat ran inside the loop: quadratic and redundant)
        data = [met_cases[model_case]['met_filter']['data'][i_period]['metrics']
                for i_period in filtered_periods]
        df_filtered = pd.concat(data, axis=1)

        # format to output dataframe and show it
        # (a bare .head() inside a for-loop body renders nothing; display() is needed)
        df_filtered.index.name = model_case + '-metrics'
        display(df_filtered.head())

DATA PROFILES

In [5]:
# Fold key for the description: drop the last two characters of the column label
des_fold = df_filtered.columns[0][:-2]

# Features dictionary of the selected fold
fold_features = memory_palace[des_fold]['features']

# Target variables (train and validation) of that fold
exp_train_y = fold_features['train_y']
exp_val_y = fold_features['val_y']

# TABLE data profile (Target): train first, validation second
tabla_1 = fn.data_profile(p_data=exp_train_y, p_type='target', p_mult=10000)
display(tabla_1)
tabla_2 = fn.data_profile(p_data=exp_val_y, p_type='target', p_mult=10000)
display(tabla_2)
0    350
1    297
Name: cod_t1, dtype: int64
0    94
1    68
Name: cod_t1, dtype: int64
In [16]:
# TABLE data profile (Inputs)
# NOTE(review): p_type='target' is passed here although the data are the input
# features (train_x / val_x), while the previous cell used the same p_type for
# the actual target — confirm whether fn.data_profile expects a different
# p_type for input features.
exp_train_x = memory_palace[des_fold]['features']['train_x']
exp_val_x = memory_palace[des_fold]['features']['val_x']

tabla_3 = fn.data_profile(p_data=exp_train_x, p_type='target', p_mult=10000)
display(tabla_3.head())
tabla_4 = fn.data_profile(p_data=exp_val_x, p_type='target', p_mult=10000)
display(tabla_4.head())
co         cov        hlv        ma_ol      ma_ho      ma_hl      ma_hlv     ma_cov     lag_ol_1   lag_ho_1   lag_hl_1   lag_hlv_1  lag_cov_1  lag_vol_1  sum_vol_1  mean_vol_1  lag_ol_2   lag_ho_2   lag_hl_2   lag_hlv_2  lag_cov_2  sd_ol_2    sd_ho_2    sd_hl_2    sd_hlv_2   sd_cov_2   lag_vol_2  sum_vol_2  mean_vol_2  lag_ol_3   lag_ho_3   lag_hl_3   lag_hlv_3  lag_cov_3  sd_ol_3    sd_ho_3    sd_hl_3    sd_hlv_3   sd_cov_3   lag_vol_3  sum_vol_3  mean_vol_3  lag_ol_4   lag_ho_4   lag_hl_4   lag_hlv_4  lag_cov_4  sd_ol_4    sd_ho_4    sd_hl_4    sd_hlv_4   sd_cov_4   lag_vol_4  sum_vol_4  mean_vol_4  sym_p_0    sym_p_1    sym_p_2    sym_p_3    sym_p_4    sym_p_5    sym_p_6    sym_p_7   sym_p_8   sym_p_9   sym_p_10  sym_p_11  sym_p_12  sym_p_13  sym_p_14  sym_p_15  sym_p_16  sym_p_17  sym_p_18  sym_p_19  sym_p_20  sym_p_21  sym_p_22  sym_p_23  sym_p_24  sym_p_25  sym_p_26  sym_p_27  sym_p_28  sym_p_29  sym_s_20   sym_s_21   sym_s_22   sym_s_23   sym_s_24   sym_s_25   sym_s_26   sym_s_27   sym_s_28   sym_s_29 
-4.110319  -0.018709  -0.537537   2.604803   4.546577   5.520764  -0.589153   0.458503   0.049043   3.566181   2.964592  -0.328628  0.157002    0.305164   4.586343   4.586343   -0.392734   2.071845   1.386346   0.465281   0.823211   4.101019   1.047267   1.596771  -0.536907  -0.412202  -0.613611   4.161734   4.161734   -0.963741   4.829068   3.196902  -0.560431   0.157091   3.665419   0.415897   2.296904  -0.217971  -0.140799   3.753808   4.157185   4.157185    1.771272  -0.745346   0.803939  -0.348111  -0.086810   3.529061   1.293269   1.822224  -0.289208  -0.275587  -0.192906   5.583071   5.583071    0.161721   0.161721   0.161721   0.161721   0.161721   0.161721   0.161721  0.148774  0.148774  0.148774  0.148774  0.148774  0.148774  0.148774  0.148774  0.148774  0.148774  0.148774  0.148774  0.148774  0.148774  0.148774  0.148774  0.148774  0.148774  0.148774  0.148774  0.148774  0.148774  0.148774  -0.069905  -0.069905  -0.069905  -0.069905  -0.069905  -0.069905  -0.069905  -0.069905  -0.069905  -0.069905    1
 0.213031   0.345547   0.078386   0.651596  -0.230797   0.273066   0.146031  -0.652769  -0.205944   1.339100   0.934106  -0.498924  0.157494    0.548066  -0.757935  -0.757935    1.830110  -0.513204   1.040442  -0.084747  -0.372775  -0.571266   1.029181   0.895887  -0.225874  -0.398683  -0.455705  -0.178663  -0.178663   -0.062129  -0.634895  -0.571202   1.274996  -1.535842   0.785747   0.538932   0.509892  -0.522737  -0.224450  -0.781158  -0.646697  -0.646697    0.463977  -0.123199   0.269735  -0.442730  -0.061504   0.443577   0.276449   0.372341   0.089471   0.312574  -0.102023  -1.005547  -1.005547    0.166310   0.166310   0.166310   0.166310   0.166310   0.166310   0.166310  0.272390  0.272390  0.272390  0.272390  0.272390  0.272390  0.272390  0.272390  0.272390  0.272390  0.272390  0.272390  0.272390  0.272390  0.272390  0.272390  0.272390  0.272390  0.272390  0.272390  0.272390  0.272390  0.272390  -0.079218  -0.079218  -0.079218  -0.079218  -0.079218  -0.079218  -0.079218  -0.079218  -0.079218  -0.079218    1
 0.266900   0.707986   1.347686  -0.472801  -0.122623  -0.433064   1.026423   0.148273  -0.960150  -0.798893  -1.421804   0.687918  0.061881   -0.811229  -0.786322  -0.786322   -0.413110   1.036861   0.520851  -0.546939   0.048077  -0.298667  -0.724896  -0.305938  -0.156348  -0.030470   0.904389  -1.359076  -1.359076    1.028015   0.296096   1.064704  -0.307453  -0.162143  -0.655993   0.313675   0.206144   0.343009  -0.219096  -0.205596  -0.673764  -0.673764   -0.404630  -0.360103  -0.619097   1.098200  -0.318793   0.050723  -0.015703   0.560568   0.250937  -0.267259  -0.776751  -0.624970  -0.624970    0.530679   0.530679   0.530679   0.530679   0.530679   0.530679   0.530679  0.727680  0.727680  0.727680  0.727680  0.727680  0.727680  0.727680  0.727680  0.727680  0.727680  0.727680  0.727680  0.727680  0.727680  0.727680  0.727680  0.727680  0.727680  0.727680  0.727680  0.727680  0.727680  0.727680  -0.040532  -0.040532  -0.040532  -0.040532  -0.040532  -0.040532  -0.040532  -0.040532  -0.040532  -0.040532    1
 0.267513   0.377794   0.250026  -0.911887   1.079608   0.230588   0.064937   0.554235  -0.745774   3.887424   2.593646  -0.422627  0.224427    0.657148  -0.747128  -0.747128   -0.318588  -0.278938  -0.483299   0.296093  -0.109776  -0.919152   3.211931   2.373595  -0.144365  -0.426654  -0.730175  -0.076680  -0.076680   -0.856595  -0.385832  -1.001442   0.580262   0.686765  -0.818935   2.777142   1.827900  -0.385583  -0.372491  -0.784737  -0.796823  -0.796823    0.631049  -0.388338   0.185602  -0.556403   0.067520  -0.973036   2.490570   1.741635  -0.367589  -0.324768   0.762850  -1.115455  -1.115455    0.426135   0.426135   0.426135   0.426135   0.426135   0.426135   0.426135  0.550476  0.550476  0.550476  0.550476  0.550476  0.550476  0.550476  0.550476  0.550476  0.550476  0.550476  0.550476  0.550476  0.550476  0.550476  0.550476  0.550476  0.550476  0.550476  0.550476  0.550476  0.550476  0.550476   0.125424   0.125424   0.125424   0.125424   0.125424   0.125424   0.125424   0.125424   0.125424   0.125424    1
 0.268024   1.028980   0.984260  -0.871305  -0.999553  -1.424545   0.260328   1.064257  -0.850345  -0.153654  -0.804842  -0.591023  0.095603    0.382579  -0.798650  -0.798650    0.474527  -0.796912  -0.274921  -0.234857  -0.294107  -0.924269  -0.726938  -0.866717   0.628073   0.200321  -0.570518  -0.354074  -0.354074   -0.853198  -0.257514  -0.893317   0.989837   1.522578  -0.025407  -0.855447  -0.863820   0.165606   0.144490  -0.789154  -0.958560  -0.958560    0.139452  -0.798650  -0.544236  -0.565451   0.042123  -0.255916  -1.086872  -1.103735   0.168919   0.296781   0.225171  -1.234254  -1.234254   -0.048040  -0.048040  -0.048040  -0.048040  -0.048040  -0.048040  -0.048040  0.093453  0.093453  0.093453  0.093453  0.093453  0.093453  0.093453  0.093453  0.093453  0.093453  0.093453  0.093453  0.093453  0.093453  0.093453  0.093453  0.093453  0.093453  0.093453  0.093453  0.093453  0.093453  0.093453   0.041030   0.041030   0.041030   0.041030   0.041030   0.041030   0.041030   0.041030   0.041030   0.041030    1
dtype: int64
co         cov        hlv        ma_ol      ma_ho      ma_hl      ma_hlv     ma_cov     lag_ol_1   lag_ho_1    lag_hl_1    lag_hlv_1  lag_cov_1  lag_vol_1  sum_vol_1  mean_vol_1  lag_ol_2   lag_ho_2   lag_hl_2   lag_hlv_2  lag_cov_2  sd_ol_2    sd_ho_2    sd_hl_2    sd_hlv_2   sd_cov_2   lag_vol_2  sum_vol_2  mean_vol_2  lag_ol_3   lag_ho_3   lag_hl_3   lag_hlv_3  lag_cov_3  sd_ol_3    sd_ho_3    sd_hl_3    sd_hlv_3   sd_cov_3   lag_vol_3  sum_vol_3  mean_vol_3  lag_ol_4   lag_ho_4   lag_hl_4   lag_hlv_4  lag_cov_4  sd_ol_4    sd_ho_4    sd_hl_4    sd_hlv_4   sd_cov_4   lag_vol_4  sum_vol_4  mean_vol_4  sym_p_0    sym_p_1    sym_p_2    sym_p_3    sym_p_4    sym_p_5    sym_p_6    sym_p_7    sym_p_8    sym_p_9    sym_p_10   sym_p_11   sym_p_12   sym_p_13   sym_p_14   sym_p_15   sym_p_16   sym_p_17   sym_p_18   sym_p_19   sym_p_20   sym_p_21   sym_p_22   sym_p_23   sym_p_24   sym_p_25   sym_p_26   sym_p_27   sym_p_28   sym_p_29   sym_s_20   sym_s_21   sym_s_22   sym_s_23   sym_s_24   sym_s_25   sym_s_26   sym_s_27   sym_s_28   sym_s_29 
-4.739625  -0.382283  -0.387419   2.884034   0.052604   1.218489   1.046407   2.080673   0.522588   0.177709    0.421954    1.075165   0.769404  -0.725853   0.759610   0.759610   -1.156372   0.436156  -0.098253   1.379279   2.314322   4.989852   0.066546   1.297366   0.441740   0.189294  -0.783107   0.026671   0.026671   -1.161167  -0.164253  -0.703103   0.467186   1.287995   5.358520  -0.017087   1.129429   0.189961   0.875595  -0.802699  -0.702946  -0.702946   -0.053780   0.267637   0.244717  -0.533564   0.332106   5.374648  -0.168326   1.130076  -0.074796   0.524173   0.155863  -1.106394  -1.106394    0.460801   0.460801   0.460801   0.460801   0.460801   0.460801   0.460801   0.600182   0.600182   0.600182   0.600182   0.600182   0.600182   0.600182   0.600182   0.600182   0.600182   0.600182   0.600182   0.600182   0.600182   0.600182   0.600182   0.600182   0.600182   0.600182   0.600182   0.600182   0.600182   0.600182  -0.066429  -0.066429  -0.066429  -0.066429  -0.066429  -0.066429  -0.066429  -0.066429  -0.066429  -0.066429    1
 0.427935   0.383688  -0.298679  -1.145059   4.512315   4.308171   1.415040   0.780050  -0.791553   10.740368   10.446360   0.430153   0.623041   0.622148  -0.511944  -0.511944   -0.047623  -0.346299  -0.370925  -0.198284  -0.225537  -0.645141   7.971733   7.891730  -0.228179  -0.661551  -0.644703   0.094146   0.094146   -0.609928   0.423193   0.142995   1.052540   2.203752  -0.760619   6.926142   6.809386  -0.608969  -0.483786  -0.748747  -0.498564  -0.498564   -0.250134  -0.597829  -0.718080   1.736837  -1.370227  -1.042259   5.919791   5.859288  -0.324659   0.354975  -0.841642  -0.912447  -0.912447    0.577774   0.577774   0.577774   0.577774   0.577774   0.577774   0.577774   0.660388   0.660388   0.660388   0.660388   0.660388   0.660388   0.660388   0.660388   0.660388   0.660388   0.660388   0.660388   0.660388   0.660388   0.660388   0.660388   0.660388   0.660388   0.660388   0.660388   0.660388   0.660388   0.660388  -0.063000  -0.063000  -0.063000  -0.063000  -0.063000  -0.063000  -0.063000  -0.063000  -0.063000  -0.063000    1
 0.206234   0.287444  -0.441991  -0.436916  -0.180862  -0.367357   1.147172  -1.683398  -0.230137  -0.605457   -0.716654   -0.281423  -0.145940  -0.717923  -0.564304  -0.564304   -0.971325   1.208820   0.765982  -0.584474   0.240387  -0.321468  -0.146546  -0.481500  -0.747156  -0.487451   0.895400  -1.079979  -1.079979    1.449534  -0.512615   0.156048  -0.074853  -0.737354  -0.671098   0.457071   0.220319  -0.964239  -0.785916  -0.563170  -0.361419  -0.361419   -0.047107  -0.424287  -0.449244   3.597961  -3.198932   0.374953   0.311038   0.045126  -0.961835  -0.503419  -0.843509  -0.675966  -0.675966    0.774237   0.774237   0.774237   0.774237   0.774237   0.774237   0.774237   0.876218   0.876218   0.876218   0.876218   0.876218   0.876218   0.876218   0.876218   0.876218   0.876218   0.876218   0.876218   0.876218   0.876218   0.876218   0.876218   0.876218   0.876218   0.876218   0.876218   0.876218   0.876218   0.876218  -0.062852  -0.062852  -0.062852  -0.062852  -0.062852  -0.062852  -0.062852  -0.062852  -0.062852  -0.062852    1
 0.212456   0.249480  -0.464141   0.045373  -0.770645  -0.796338  -0.845258   0.217392  -0.779293  -0.163798   -0.527322   -0.190641   0.516751  -0.689411  -0.442965  -0.442965    0.756207  -0.514516  -0.166688  -0.665933   0.055461  -0.471234  -0.490365  -0.478434  -0.644056  -0.635483   0.909847  -0.953895  -0.953895    0.570676  -0.420922  -0.158895  -0.438091   0.032565  -0.162692  -0.449841  -0.552900  -0.836624  -0.789438  -0.373045  -0.207923  -0.207923   -0.206756  -0.419096  -0.517938   0.140679  -0.415208  -0.367264  -0.527773  -0.593731  -0.989952  -0.892190  -0.759435  -0.423804  -0.423804   -0.870562  -0.870562  -0.870562  -0.870562  -0.870562  -0.870562  -0.870562  -0.861589  -0.861589  -0.861589  -0.861589  -0.861589  -0.861589  -0.861589  -0.861589  -0.861589  -0.861589  -0.861589  -0.861589  -0.861589  -0.861589  -0.861589  -0.861589  -0.861589  -0.861589  -0.861589  -0.861589  -0.861589  -0.861589  -0.861589  -0.057683  -0.057683  -0.057683  -0.057683  -0.057683  -0.057683  -0.057683  -0.057683  -0.057683  -0.057683    1
 0.218526   0.298987  -0.514162   0.430095   0.397978   0.594114   1.351358   0.140323  -1.160381  -0.424028   -0.966564    3.673508   0.145463  -0.849346  -0.571809  -0.571809    3.365013   0.317366   1.883878  -0.614344   0.067241  -0.457598  -0.560357  -0.401213   2.929901  -0.748379   2.532886  -1.197121  -1.197121   -0.966747   1.849445   1.411904  -0.267780   0.851430   2.331821  -0.155977   1.156285   2.414869  -0.962197  -0.044696   1.040312   1.040312    0.193907  -0.417209  -0.330513   0.299348  -1.083551   2.099941   0.598714   1.012369   1.986289  -0.714769  -0.748593   0.759873   0.759873    0.930201   0.930201   0.930201   0.930201   0.930201   0.930201   0.930201   1.026684   1.026684   1.026684   1.026684   1.026684   1.026684   1.026684   1.026684   1.026684   1.026684   1.026684   1.026684   1.026684   1.026684   1.026684   1.026684   1.026684   1.026684   1.026684   1.026684   1.026684   1.026684   1.026684  -0.034599  -0.034599  -0.034599  -0.034599  -0.034599  -0.034599  -0.034599  -0.034599  -0.034599  -0.034599    1
dtype: int64

PLOT: MULTI PLOT HISTOGRAMS

In [7]:
# Histograms of the first nine input features (train set)
plot_2_1 = vs.plot_h_histograms(p_data=exp_train_x.iloc[:, :9])
plot_2_1.show()
In [8]:
# Histograms of the last ten input features (train set)
plot_2_2 = vs.plot_h_histograms(p_data=exp_train_x.iloc[:, -10:])
plot_2_2.show()

PLOT: HEATMAP CORRELATIONS

In [9]:
# -- Correlation between the target and the autoregressive (linear) features
exp_1 = pd.concat([exp_train_y.copy(), exp_train_x.iloc[:, 0:55].copy()], axis=1)
exp_1_corr_p = exp_1.corr('pearson')

# Heatmap of the pearson correlation matrix
title_txt = 'Linear-Autoregressive Features Vs Target Correlation (pearson)'
exp_1_plot = vs.plot_heatmap_corr(p_data=exp_1_corr_p.copy(), p_title=title_txt)
exp_1_plot.show()
In [10]:
# -- Target and Symbolic Features correlation
exp_2 = pd.concat([exp_train_y.copy(), exp_train_x.iloc[:, -40:].copy()], axis=1)
# BUG FIX: this cell previously computed exp_1.corr(...) and plotted
# exp_1_corr_p, so the "Symbolic Features" heatmap silently showed the
# autoregressive features again. Use exp_2 / exp_2_corr_p instead.
exp_2_corr_p = exp_2.corr('pearson')
title_txt = 'Symbolic Features Vs Target Correlation (pearson)'
exp_2_plot = vs.plot_heatmap_corr(p_data=exp_2_corr_p.copy(), p_title=title_txt)

# Show plot
exp_2_plot.show()

PLOT: All ROCs in FOLD

In [11]:
# -- Configuration of the case to visualize
case = 'met_max'           # case to plot
subset = 'train'           # data subset to use
metric_case = 'acc-train'  # metric to use
model_case = 'ann-mlp'     # model to evaluate
period_case = 'y_2012'     # fold period

# parameters of the evaluated models (hall of fame)
d_params = memory_palace[period_case][model_case]['p_hof']['hof']

# fpr, tpr and the chosen metric for every HoF model in the selected fold
hof_results = memory_palace[period_case][model_case]['e_hof']
d_plot_4 = {}
for i in range(len(d_params)):
    i_metrics = hof_results[i]['metrics'][subset]
    d_plot_4[i] = {'tpr': i_metrics['tpr'],
                   'fpr': i_metrics['fpr'],
                   metric_case: hof_results[i]['pro-metrics'][metric_case]}

# Plot title
dt.theme_plot_4['p_labels']['title'] = 'in Fold max & min ' + metric_case + ' ' + subset + ' data'

# ROC curves of all HoF models
plot_4 = vs.plot_multiroc(p_data=d_plot_4, p_metric=metric_case, p_theme=dt.theme_plot_4)

# Render the figure in the notebook
plot_4.show()

PLOT: CLASSIFICATION FOLD RESULTS

In [12]:
# Case and model used for the classification plot
case = 'met_max'
model_case = 'ann-mlp'

# Selected case data for this model
selected_case = met_cases[model_case][case]

# Plot title
dt.theme_plot_3['p_labels']['title'] = ('inFold ' + case + ' for: ' + model_case
                                        + ' ' + selected_case['period'])

# Observed and predicted train targets of the selected case
train_y = selected_case['data']['results']['data']['train']

# OHLC prices of the corresponding period
ohlc_prices = folds[selected_case['period']]

ohlc_class = {'train_y': train_y['train_y'], 'train_y_pred': train_y['train_pred_y']}

# Vertical line at the last timestamp of the training segment
date_vlines = [ohlc_class['train_y'].index[-1]]

# OHLC plot with the classification results overlaid
plot_3 = vs.plot_ohlc_class(p_ohlc=ohlc_prices, p_theme=dt.theme_plot_3,
                            p_data_class=ohlc_class, p_vlines=date_vlines)

# Render the figure in the notebook
plot_3.show()

MIN, MAX, MODE CASES

In [13]:
# model to explore (second entry in the models list)
model_case = ml_models[1]

# best-of-HoF case according to model_case and metric_case
max_case = met_cases[model_case]['met_max']
maxcase_period = max_case['period']
maxcase_params = max_case['params']
maxcase_metric = max_case[metric_case]

# output DataFrame with the process metrics of the max case
df_max = pd.DataFrame([max_case['data']['pro-metrics']]).T
df_max.columns = [maxcase_period]
df_max.index.name = model_case + '-metrics'
df_max.head(5)
Out[13]:
y_2012
ann-mlp-metrics
acc-train 0.902628
acc-val 0.518519
acc-mean 0.710574
acc-diff 0.384109
acc-weighted 0.412904
In [14]:
# worst-of-HoF case according to model_case and metric_case
min_case = met_cases[model_case]['met_min']
mincase_period = min_case['period']
mincase_params = min_case['params']
mincase_metric = min_case[metric_case]

# output DataFrame with the process metrics of the min case
df_min = pd.DataFrame([min_case['data']['pro-metrics']]).T
df_min.columns = [mincase_period]
df_min.index.name = model_case + '-metrics'
df_min.head(5)
Out[14]:
y_2010
ann-mlp-metrics
acc-train 0.613601
acc-val 0.617284
acc-mean 0.615444
acc-diff 0.003683
acc-weighted 0.307170
In [15]:
# Mode cases: parameter sets with their repetition counts and the periods where they occur
mode_data = met_cases[model_case]['met_mode']['data']
mode_repetitions = pd.DataFrame(mode_data).T
mode_repetitions.head(5)
Out[15]:
params repetitions periods
(2, 75, 'relu', [0, 0], [0, 0], 0.005, 0.055, 0.4) (2, 75, relu, [0, 0], [0, 0], 0.005, 0.055, 0.4) 0 [y_2009]
(2, 105, 'relu', [0, 0], [0, 0], 0.01, 0.055, 0.07) (2, 105, relu, [0, 0], [0, 0], 0.01, 0.055, 0.07) 0 [y_2009]
(2, 100, 'relu', [0, 0], [0, 0], 0.005, 0.055, 0.1) (2, 100, relu, [0, 0], [0, 0], 0.005, 0.055, 0.1) 0 [y_2009]
(2, 105, 'relu', [0, 0], [0, 0], 0.005, 0.055, 0.001) (2, 105, relu, [0, 0], [0, 0], 0.005, 0.055, 0... 0 [y_2009]
(2, 100, 'relu', [0, 0], [0, 0], 0.02, 0.05, 0.2) (2, 100, relu, [0, 0], [0, 0], 0.02, 0.05, 0.2) 0 [y_2009]